## Loading required package: knitr

## The basic files and libraries needed for most presentations
# creates the libraries and common-functions sections
read_chunk("../common/utility_functions.R")

# Presentation dependencies: attach everything up front with library() so a
# missing package fails fast (require() only warns and returns FALSE).
library(ggplot2)   # plots
library(lattice)   # nicer scatter plots
library(plyr)      # for processing data.frames (ddply/ldply/llply)
library(grid)      # contains the arrow function
library(biOps)     # basic image processing (binary erosion/dilation helpers)
library(doMC)      # parallel backend used by plyr's .parallel=T
library(png)       # for reading png images
library(gridExtra) # grid.arrange for multi-panel figures
library(reshape2)  # for the melt function
## To install EBImage
# source("http://bioconductor.org/biocLite.R")
# biocLite("EBImage")
library(EBImage)   # for more image processing (closing, makeBrush, ...)
used.libraries<-c("ggplot2","lattice","plyr","reshape2","grid","gridExtra","biOps","png","EBImage")

# start parallel environment (registers the multicore backend for foreach/plyr)
registerDoMC()
# functions for converting images back and forth
# Convert a matrix image into a long-format data frame with one row per
# pixel: x is the row index, y the column index, val the intensity.
# Pixels are emitted column-major (x varies fastest), matching as.vector().
im.to.df <- function(in.img) {
  # seq_len() is safe for degenerate 0-row/0-col inputs (1:0 is not)
  out.im <- expand.grid(x = seq_len(nrow(in.img)),
                        y = seq_len(ncol(in.img)))
  out.im$val <- as.vector(in.img)
  out.im
}
df.to.im<-function(in.df,val.col="val",inv=F) {
  in.vals<-in.df[[val.col]]
  if(class(in.vals[1])=="logical") in.vals<-as.integer(in.vals*255)
  if(inv) in.vals<-255-in.vals
  out.mat<-matrix(in.vals,nrow=length(unique(in.df$x)),byrow=F)
  attr(out.mat,"type")<-"grey"
  out.mat
}
# Wrapper around plyr::ddply for the common case where the first `cols`
# grouping columns were produced by cut(): each interval label such as
# "(0.5,1.5]" is replaced by the interval midpoint, and the "cut(...)"
# wrapper is stripped from the column name.
ddply.cutcols <- function(..., cols = 1) {
  # run standard ddply command
  cur.table <- ddply(...)
  # "(a,b]" -> mean of a and b (the bin midpoint)
  cutlabel.fixer <- function(oVal) {
    sapply(oVal, function(x) {
      cnv <- as.character(x)
      bounds <- strsplit(substr(cnv, 2, nchar(cnv) - 1), ",")[[1]]
      mean(as.numeric(bounds))
    })
  }
  # "cut(col, breaks)" -> "col": drop everything before the first "(" and
  # after the last ",". seq_len() replaces the accidental-but-working
  # c(1:n-1) indexing and handles names with no "," or "(" cleanly.
  cutname.fixer <- function(c.str) {
    s.str <- strsplit(c.str, "(", fixed = TRUE)[[1]]
    t.str <- strsplit(paste(s.str[-1], collapse = "("), ",")[[1]]
    paste(t.str[seq_len(length(t.str) - 1)], collapse = ",")
  }
  # seq_len() also makes cols = 0 a no-op instead of iterating c(1, 0)
  for (i in seq_len(cols)) {
    cur.table[, i] <- cutlabel.fixer(cur.table[, i])
    names(cur.table)[i] <- cutname.fixer(names(cur.table)[i])
  }
  cur.table
}

# Render a list of PNG files as a grid of labelled ggplot panels.
#   file.list : vector/list of PNG file paths
#   title.fun : function(path) -> panel title
#   zoom      : raster width in "npc" units (1 = fill the panel)
# Each PNG is embedded in a blank qplot via annotation_custom so ggplot
# titles and themes apply; grid.arrange draws the combined grid.
show.pngs.as.grid<-function(file.list,title.fun,zoom=1) {
  preparePng<-function(x) rasterGrob(readPNG(x,native=T,info=T),width=unit(zoom,"npc"),interp=F)
  labelPng<-function(x,title="junk") (qplot(1:300, 1:300, geom="blank",xlab=NULL,ylab=NULL,asp=1)+
                                        annotation_custom(preparePng(x))+
                                        labs(title=title)+theme_bw(24)+
                                        theme(axis.text.x = element_blank(),
                                              axis.text.y = element_blank()))
  imgList<-llply(file.list,function(x) labelPng(x,title.fun(x)) )
  # draw all panels on the current device in one arranged grid
  do.call(grid.arrange,imgList)
}
## Standard image processing tools which I use for visualizing the examples in the script
# Per-label centroids: for each value of `val`, compute the unweighted
# centre (xv, yv) and the intensity-weighted centre of mass (xm, ym).
# Expects columns x, y, weight and val in in.df.
commean.fun <- function(in.df) {
  ddply(in.df, .(val), function(px) {
    total.weight <- sum(px$weight)
    data.frame(xv = mean(px$x),
               yv = mean(px$y),
               xm = sum(px$x * px$weight) / total.weight,
               ym = sum(px$y * px$weight) / total.weight)
  })
}

colMeans.df<-function(x,...) as.data.frame(t(colMeans(x,...)))

# Per-label PCA of pixel coordinates: for each value of `val`, eigen-
# decompose the 2x2 covariance of (x, y) and return the label centroid plus
# the eigenvector components (vx, vy), sqrt-eigenvalues vw (std. dev. along
# each principal axis) and the axis angle th.off. Produces two rows per
# label, one per eigenvector.
pca.fun<-function(in.df) {
  ddply(in.df,.(val), function(c.cell) {
    c.cell.cov<-cov(c.cell[,c("x","y")])
    c.cell.eigen<-eigen(c.cell.cov)
    
    c.cell.mean<-colMeans.df(c.cell[,c("x","y")])
    # the value of this assignment is what the anonymous function returns
    out.df<-cbind(c.cell.mean,
                  data.frame(vx=c.cell.eigen$vectors[1,],
                             vy=c.cell.eigen$vectors[2,],
                             vw=sqrt(c.cell.eigen$values),
                             th.off=atan2(c.cell.eigen$vectors[2,],c.cell.eigen$vectors[1,]))
    )
  })
}
# Turn the two-row-per-label output of pca.fun into ellipse outline points:
# semi-axes are sqrt(5) * the principal standard deviations, oriented along
# the first eigenvector and centred on the label centroid.
vec.to.ellipse<-function(pca.df) {
  ddply(pca.df,.(val),function(cur.pca) {
    # assume there are two vectors now
    # row 1 = first eigenvector (major axis), row 2 supplies the minor width
    create.ellipse.points(x.off=cur.pca[1,"x"],y.off=cur.pca[1,"y"],
                          b=sqrt(5)*cur.pca[1,"vw"],a=sqrt(5)*cur.pca[2,"vw"],
                          th.off=pi/2-atan2(cur.pca[1,"vy"],cur.pca[1,"vx"]),
                          x.cent=cur.pca[1,"x"],y.cent=cur.pca[1,"y"])
  })
}

# test function for ellipse generation
# ggplot(ldply(seq(-pi,pi,length.out=100),function(th) create.ellipse.points(a=1,b=2,th.off=th,th.val=th)),aes(x=x,y=y))+geom_path()+facet_wrap(~th.val)+coord_equal()
# Sample `pts` points along an ellipse with semi-axes a and b, rotated by
# th.off and centred at (x.off, y.off). Extra ... arguments become extra
# columns; `id` uniquely tags each ellipse for grouped plotting.
create.ellipse.points <- function(x.off = 0, y.off = 0, a = 1, b = NULL,
                                  th.off = 0, th.max = 2 * pi, pts = 36, ...) {
  if (is.null(b)) {
    b <- a  # single axis given: draw a circle
  }
  angles <- seq(0, th.max, length.out = pts)
  data.frame(
    x = a * cos(th.off) * cos(angles) + b * sin(th.off) * sin(angles) + x.off,
    y = -a * sin(th.off) * cos(angles) + b * cos(th.off) * sin(angles) + y.off,
    id = as.factor(paste(x.off, y.off, a, b, th.off, pts, sep = ":")),
    ...
  )
}
# Draw helper: convert the first row of a tensor-cell description
# (columns x, y, a, b, th, col) into ellipse outline points, carrying the
# colour value along as an extra column.
deform.ellipse.draw <- function(c.box) {
  cell <- c.box[1, ]
  create.ellipse.points(x.off = cell$x, y.off = cell$y,
                        a = cell$a, b = cell$b,
                        th.off = cell$th,
                        col = cell$col)
}
# Per-label bounding boxes: for each value of `val`, return the centroid
# plus the 5-point closed rectangle outline (xi, yi) at the pixel-edge
# extents (emin/emax pad by half a pixel) and the box width/height (xw, yw).
bbox.fun<-function(in.df) {
  ddply(in.df,.(val), function(c.cell) {
    c.cell.mean<-colMeans.df(c.cell[,c("x","y")])
    xmn<-emin(c.cell$x)
    xmx<-emax(c.cell$x)
    ymn<-emin(c.cell$y)
    ymx<-emax(c.cell$y)
    # 5 corner points trace the rectangle and close it (first == last)
    out.df<-cbind(c.cell.mean,
                  data.frame(xi=c(xmn,xmn,xmx,xmx,xmn),
                             yi=c(ymn,ymx,ymx,ymn,ymn),
                             xw=xmx-xmn,
                             yw=ymx-ymn
                  ))
  })
}

# since the edge of the pixel is 0.5 away from the middle of the pixel
emin <- function(...) {
  # pixel edge: half a pixel below the smallest coordinate
  min(...) - 0.5
}
emax <- function(...) {
  # pixel edge: half a pixel above the largest coordinate
  max(...) + 0.5
}
# Per-label extents: for each value of `val`, return the centroid plus two
# rows describing the horizontal and vertical spans through it. Row 1 spans
# y at the centroid's x; row 2 spans x at the centroid's y (used to draw
# cross-hair extent lines). emin/emax pad by half a pixel to the pixel edge.
extents.fun<-function(in.df) {
  ddply(in.df,.(val), function(c.cell) {
    c.cell.mean<-colMeans.df(c.cell[,c("x","y")])
    out.df<-cbind(c.cell.mean,data.frame(xmin=c(c.cell.mean$x,emin(c.cell$x)),
                                         xmax=c(c.cell.mean$x,emax(c.cell$x)),
                                         ymin=c(emin(c.cell$y),c.cell.mean$y),
                                         ymax=c(emax(c.cell$y),c.cell.mean$y)))
  })
}

# Root directory holding the figures and data shared across lectures
common.image.path <- "../common"
# Build full paths to shared figure / data files from a bare file name
qbi.file <- function(file.name) {
  file.path(common.image.path, "figures", file.name)
}
qbi.data <- function(file.name) {
  file.path(common.image.path, "data", file.name)
}

th_fillmap.fn<-function(max.val) scale_fill_gradientn(colours=rainbow(10),limits=c(0,max.val))

Quantitative Big Imaging

author: Kevin Mader date: 5 March 2015 width: 1440 height: 900 css: ../common/template.css transition: rotate

Basic Segmentation and Discrete Binary Structures

Course Outline

source('../common/schedule.R')

Lesson Outline

Literature / Useful References

Motivation: Why do we do imaging experiments?

#incremental: true


To test a hypothesis

2560 x 2560 x 2160 x 32 bit = 56GB / sample

What did we want in the first place

#incremental: true

Single number:

Why do we perform segmentation?

Review: Filtering and Image Enhancement

incremental: true

What did people used to do?


Model-based Analysis

\[ S_{measured}(\vec{x}) = F_{system}(S_{stimulus}(\vec{x}),S_{sample}(\vec{x})) \]

Direct Imaging (simple)


grad.beam.profile<-matrix(rep(0.9+0.00*c(1:nrow(gray.img)),ncol(gray.img)),dim(gray.img))
sample.img<-1.0*exp(-material.img*0.1)
ccd.img<-sample.img * grad.beam.profile
model.image<-subset(
  rbind(
  cbind(im.to.df(grad.beam.profile),type="Beam"),
  cbind(im.to.df(sample.img),type="Sample"),
  cbind(im.to.df(ccd.img),type="Detector")
  ),
  x%%3==0 & y%%3==0 # downsample by 9
  )
ggplot(model.image,aes(x=y,y=x,fill=val))+
  geom_tile()+
  guides(fill=F)+
  facet_grid(type~.)+
  coord_equal()+
  theme_bw(20)
Single Cell

Nonuniform Beam-Profiles

In many setups there is uneven illumination caused by incorrectly adjusted equipment and fluctuations in power and positioning

grad.beam.profile<-matrix(rep(0.9+0.001*c(1:nrow(gray.img)),ncol(gray.img)),dim(gray.img))
sample.img<-1.0*exp(-material.img*0.1)
ccd.img<-sample.img * grad.beam.profile
model.image<-subset(
  rbind(
  cbind(im.to.df(grad.beam.profile),type="Beam"),
  cbind(im.to.df(sample.img),type="Sample"),
  cbind(im.to.df(ccd.img),type="Detector")
  ),
  x%%3==0 & y%%3==0 # downsample by 9
  )
ggplot(model.image,aes(x=y,y=x,fill=val))+
  geom_tile()+
  facet_grid(type~.)+
  coord_equal()+
  labs(fill="")+
  scale_fill_gradientn(colours=rainbow(6))+
  theme_bw(20)
Gradient Profile


Frequently there is a fall-off of the beam away from the center (as is the case of a Gaussian beam which frequently shows up for laser systems). This can make extracting detail away from the center challenging

beam.profile<-mutate(
  expand.grid(x=1:nrow(material.img),y=1:ncol(material.img)),
  val=exp(-((x-mean(x))^2+(y-mean(y))^2)/350^2) # just a simple gaussian profile
  )
beam.profile<-df.to.im(beam.profile)
sample.img<-1.0*exp(-material.img*0.1)
ccd.img<-sample.img * beam.profile
model.image<-subset(
  rbind(
  cbind(im.to.df(beam.profile),type="Beam"),
  cbind(im.to.df(sample.img),type="Sample"),
  cbind(im.to.df(ccd.img),type="Detector")
  ),
  x%%3==0 & y%%3==0 # downsample by 9
  )
ggplot(model.image,aes(x=y,y=x,fill=val))+
  geom_tile()+
  facet_grid(type~.)+
  coord_equal()+
  scale_fill_gradientn(colours=rainbow(6))+
  labs(fill="")+
  theme_bw(20)
Gaussian Beam

Absorption Imaging (X-ray, Ultrasound, Optical)


nx<-4
ny<-4
grad.im<-expand.grid(x=c(-nx:nx)/nx*2*pi,y=c(-ny:ny)/ny*2*pi)
phase.im<-runif(nrow(grad.im))
grad.im<-cbind(grad.im,
               col=1*(phase.im>0.66)+
                 2*(phase.im<0.33)+
                 0.4*runif(nrow(grad.im),min=-1,max=1))
bn.wid<-diff(range(grad.im$col))/10

bl.fun<-function(x) 1*exp(-x*0.5)

ggplot(grad.im,aes(x=col,y=bl.fun(col)))+
  geom_point(aes(color=cut_interval(col,3)))+
  geom_segment(data=data.frame(
    x=c(0.5,1.5),y=c(0.25),
    xend=c(0.5,1.5),yend=c(1.25)
    ),aes(x=x,y=y,xend=xend,yend=yend),fill="red",size=3,alpha=0.5)+
  labs(x=expression(alpha(x,y)),y=expression(paste("I"[sample],"(x,y)")),title="Probability Density Function",fill="Groups")+theme_bw(20)
Attenuation to Intensity

ggplot(grad.im,aes(x=col))+
  geom_density(aes(fill=cut_interval(col,3)))+
  geom_segment(data=data.frame(
    x=c(0.5,1.5),y=c(0),
    xend=c(0.5,1.5),yend=c(6)
    ),aes(x=x,y=y,xend=xend,yend=yend),fill="red",size=3,alpha=0.5)+
  labs(x=expression(alpha(x,y)),y="Frequency",title="Probability Density Function",fill="Groups")+theme_bw(20)
Image Histogram

Where does segmentation get us?

incremental: true

Applying a threshold to an image

Start out with a simple image of a cross with added noise \[ I(x,y) = f(x,y) \]

nx<-5
ny<-5
cross.im<-expand.grid(x=c(-nx:nx)/nx*2*pi,y=c(-ny:ny)/ny*2*pi)
cross.im<-cbind(cross.im,
               col=1.5*with(cross.im,abs(cos(x*y))/(abs(x*y)+(3*pi/nx)))+
                 0.5*runif(nrow(cross.im)))
bn.wid<-diff(range(cross.im$col))/10
ggplot(cross.im,aes(x=x,y=y,fill=col))+
  geom_tile()+
  scale_fill_gradient(low="black",high="white")+
  labs(fill="Intensity")+
  theme_bw(20)


The intensity can be described with a probability density function \[ P_f(x,y) \]

ggplot(cross.im,aes(x=col))+geom_histogram(binwidth=bn.wid)+
  labs(x="Intensity",title="Probability Density Function")+
  theme_bw(20)
Probability density function

Applying a threshold to an image

By examining the image and probability distribution function, we can deduce that the underlying model is a whitish phase that makes up the cross and the darkish background

thresh.val<-0.75
cross.im$val<-(cross.im$col>=thresh.val)
ggplot(cross.im,aes(x=x,y=y))+
  geom_tile(aes(fill=col))+
  geom_tile(data=subset(cross.im,val),fill="red",color="black",alpha=0.3)+
  geom_tile(data=subset(cross.im,!val),fill="blue",color="black",alpha=0.3)+
  scale_fill_gradient(low="black",high="white")+
  labs(fill="Intensity")+
  theme_bw(20)
With Threshold Overlay


Applying the threshold is a deceptively simple operation \[ I(x,y) = \begin{cases} 1, & f(x,y)\geq0.5 \\ 0, & f(x,y)<0.5 \end{cases}\]

ggplot(cbind(cross.im,in.thresh=cross.im$col>=thresh.val),aes(x=col))+
  geom_histogram(binwidth=bn.wid,aes(fill=in.thresh))+
  labs(x="Intensity",color="In Threshold")+scale_fill_manual(values=c("blue","red"))+
  theme_bw(20)
With Threshold Overlay

Various Thresholds

thresh.vals<-c(2:10)/10
grad.im.th<-ldply(thresh.vals,function(thresh.val) 
  cbind(cross.im,thresh=thresh.val,in.thresh=(cross.im$col>=thresh.val)))
ggplot(grad.im.th,aes(x=col))+
  geom_histogram(binwidth=bn.wid,aes(fill=in.thresh))+
  labs(x="Intensity",fill="Above Threshold")+facet_wrap(~thresh)+
  theme_bw(15)
Threshold Histograms


ggplot(grad.im.th,aes(x=x,y=y))+
  geom_tile(aes(fill=in.thresh),color="black",alpha=0.75)+
  labs(fill="Above Threshold")+facet_wrap(~thresh)+
  theme_bw(20)
Threshold Images

Segmenting Cells

cellImage<-im.to.df(jpeg::readJPEG("ext-figures/Cell_Colony.jpg"))
ggplot(cellImage,aes(x=x,y=y,fill=val))+
  geom_tile()+
  labs(fill="Intensity")+coord_equal()+
  theme_bw(20)
Cell Colony

plot of chunk unnamed-chunk-16


Different Threshold Values

th.vals<-seq(0.4,0.85,length.out=3)
thlabel<-function(x,...) switch(x,"Too low","Good","Too high")
im.vals<-ldply(1:length(th.vals),function(th.val)
  cbind(cellImage,
        in.thresh=ifelse(cellImage$val
Cell Colony


plot of chunk unnamed-chunk-18

Other Image Types

While scalar images are easiest, it is possible for any type of image \[ I(x,y) = \vec{f}(x,y) \]

nx<-7
ny<-7
n.pi<-4
grad.im<-expand.grid(x=c(-nx:nx)/nx*n.pi*pi,y=c(-ny:ny)/ny*n.pi*pi)

grad.im<-cbind(grad.im,
               col=1.5*with(grad.im,abs(cos(x*y))/(abs(x*y)+(3*pi/nx)))+
                 0.5*runif(nrow(grad.im)),
               x.vec=with(grad.im,y),
               y.vec=with(grad.im,x))
# normalize vector
grad.im[,c("x.vec","y.vec")]<-with(grad.im,cbind(x.vec/(sqrt(x.vec^2+y.vec^2)),
                                         y.vec/(sqrt(x.vec^2+y.vec^2))))
bn.wid<-c(diff(range(grad.im$x.vec))/10,diff(range(grad.im$y.vec))/10)
ggplot(grad.im,aes(x=x,y=y,fill=col))+
  geom_segment(aes(xend=x+x.vec,yend=y+y.vec),arrow=arrow(length = unit(0.15,"cm")),size=2)+
  scale_fill_gradient(low="black",high="white")+
  guides(fill=F)+labs(title="Orientation Map")+
  theme_bw(20)


The intensity can be described with two separate or a single joint probability density function \[ P_{\vec{f}\cdot \vec{i}}(x,y), P_{\vec{f}\cdot \vec{j}}(x,y) \]

ggplot(grad.im,aes(x=x.vec,y=y.vec))+
  stat_bin2d(binwidth=c(0.25,.25),drop=F)+
  labs(x="Pfx",y="Pfy",fill="Frequency",title="Orientation Histogram")+
  xlim(-1,1)+ylim(-1,1)+
  theme_bw(20)
With Threshold Overlay

Applying a threshold

A threshold is now more difficult to apply since there are now two distinct variables to deal with. The standard approach can be applied to both \[ I(x,y) = \begin{cases} 1, & \vec{f}_x(x,y) \geq0.25 \text{ and}\\ & \vec{f}_y(x,y) \geq0.25 \\ 0, & \text{otherwise} \end{cases}\]

g.with.thresh<-cbind(grad.im,in.thresh=with(grad.im,x.vec>0.25 & y.vec>0.25))
ggplot(g.with.thresh,
       aes(x=x,y=y,fill=in.thresh,color=in.thresh))+
  geom_tile(alpha=0.5,aes(fill=in.thresh))+
  geom_segment(aes(xend=x+x.vec,yend=y+y.vec),arrow=arrow(length = unit(0.2,"cm")),size=3)+
  labs(color="In Threshold")+guides(fill=FALSE)+
  theme_bw(20)


This can also be shown on the joint probability distribution as

bn.wid<-c(0.25,.25)
keep.bins<-expand.grid(x.vec=seq(-1,1,bn.wid[1]/10),y.vec=seq(-1,1,bn.wid[2]/10))
keep.bins<-cbind(keep.bins,in.thresh=with(keep.bins,x.vec>0.25 & y.vec>0.25))
ggplot(g.with.thresh,aes(x=x.vec,y=y.vec))+
  stat_bin2d(binwidth=bn.wid,drop=F)+
  geom_tile(data=subset(g.with.thresh,in.thresh),fill="red",alpha=0.4)+
  labs(x="Pfx",y="Pfy",fill="Threshold")+xlim(-1,1)+ylim(-1,1)+
  theme_bw(20)
With Threshold Overlay

Applying a threshold

Given the presence of two variables; however, more advanced approaches can also be investigated. For example we can keep only components parallel to the x axis by using the dot product. \[ I(x,y) = \begin{cases} 1, & |\vec{f}(x,y)\cdot \vec{i}| = 1 \\ 0, & \text{otherwise} \end{cases}\]

i.vec<-c(1,0)
j.vec<-c(0,1)
g.cmp.thresh<-cbind(grad.im,in.thresh=with(grad.im,
                                            abs(x.vec*i.vec[1]+y.vec*i.vec[2])==1
                                            ))
ggplot(g.cmp.thresh,
       aes(x=x,y=y,fill=in.thresh,color=in.thresh))+
  geom_tile(alpha=0.5,aes(fill=in.thresh))+
  geom_segment(aes(xend=x+x.vec,yend=y+y.vec),arrow=arrow(length = unit(0.2,"cm")),size=3)+
  labs(color="In Threshold")+guides(fill=FALSE)+
  theme_bw(20)

Looking at Orientations

We can tune the angular acceptance by using the fact \[\vec{x}\cdot\vec{y}=|\vec{x}| |\vec{y}| \cos(\theta_{x\rightarrow y}) \] \[ I(x,y) = \begin{cases} 1, & \cos^{-1}(\vec{f}(x,y)\cdot \vec{i}) \leq \theta^{\circ} \\ 0, & \text{otherwise} \end{cases}\]


i.vec<-c(1,0)
j.vec<-c(0,1)
# Flag each vector in the (global) grad.im as in-threshold when its angle
# to i.vec is at most c.ang degrees (acos of the dot product, compared in
# radians). Returns grad.im with ang.val and in.thresh columns appended.
# The pointless local assignment to g.cmp.thresh in the original is removed;
# the function's return value is unchanged.
ang.accept <- function(c.ang) {
  cbind(grad.im,
        ang.val = c.ang,
        in.thresh = with(grad.im,
                         acos(x.vec * i.vec[1] + y.vec * i.vec[2]) <= c.ang / 180 * pi))
}
ang.vals<-seq(5,180,length.out=9)
ggplot(ldply(ang.vals,ang.accept),
       aes(x=x,y=y,fill=in.thresh,color=in.thresh))+
  geom_tile(alpha=0.5,aes(fill=in.thresh))+
  geom_segment(aes(xend=x+x.vec,yend=y+y.vec),arrow=arrow(length = unit(0.2,"cm")),size=3)+
  facet_wrap(~ang.val)+
  labs(color="In Threshold")+guides(fill=FALSE)
  theme_bw(20)

Other Image Types

Going beyond vector the possibilities for images are limitless. The following shows a tensor plot with the tensor represented as an ellipse. \[ I(x,y) = \hat{f}(x,y) \]

nx<-4
ny<-4
n.pi<-4
grad.im<-expand.grid(x=c(-nx:nx)/nx*n.pi*pi,
                     y=c(-ny:ny)/ny*n.pi*pi)

grad.im<-cbind(grad.im,
               col=1.5*with(grad.im,abs(cos(x*y))/(abs(x*y)+(3*pi/nx)))+
                 0.5*runif(nrow(grad.im)),
              a=with(grad.im,sqrt(2/(abs(x)+0.5))),
               b=with(grad.im,0.5*sqrt(abs(x)+1)),
              th=0.5*runif(nrow(grad.im)),
              aiso=1,count=1)

# Sample `pts` points along an ellipse with semi-axes a and b, rotated by
# th.off and centred at (x.off, y.off); ... adds extra columns and `id`
# tags the ellipse for grouped plotting. (Redefinition of the helper above.)
create.ellipse.points <- function(x.off = 0, y.off = 0, a = 1, b = NULL,
                                  th.off = 0, th.max = 2 * pi, pts = 36, ...) {
  if (is.null(b)) {
    b <- a  # circle when only one axis is supplied
  }
  th.seq <- seq(0, th.max, length.out = pts)
  data.frame(
    x = a * cos(th.off) * cos(th.seq) + b * sin(th.off) * sin(th.seq) + x.off,
    y = -a * sin(th.off) * cos(th.seq) + b * cos(th.off) * sin(th.seq) + y.off,
    id = as.factor(paste(x.off, y.off, a, b, th.off, pts, sep = ":")),
    ...
  )
}
# Convert the first row of a tensor-cell description (x, y, a, b, th, col)
# into ellipse outline points, carrying the colour along as a column.
# (Redefinition of the helper above.)
deform.ellipse.draw <- function(c.box) {
  cell <- c.box[1, ]
  create.ellipse.points(x.off = cell$x, y.off = cell$y,
                        a = cell$a, b = cell$b,
                        th.off = cell$th,
                        col = cell$col)
}

# normalize vector
tens.im<-ddply(grad.im,.(x,y),deform.ellipse.draw)

ggplot(tens.im,aes(x=x,y=y,group=as.factor(id),fill=col))+
  geom_polygon(color="black")+coord_fixed(ratio=1)+scale_fill_gradient(low="black",high="white")+guides(fill=F)+
  theme_bw(20)

Once the variable count is above 2, individual density functions and a series of cross plots are easier to interpret than some multidimensional density hypervolume.


ggplot(grad.im)+
  geom_histogram(aes(x=a,fill="Width"),alpha=0.7)+
  geom_histogram(aes(x=b,fill="Height"),alpha=0.7)+
  geom_histogram(aes(x=th,fill="Orientation"),alpha=0.7)+
  geom_histogram(aes(x=col,fill="Color"),alpha=0.7)+
  guides(color=F)+labs(fill="Variable")+
  theme_bw(15)
Variable distributions

plot(grad.im[,c("a","b","th","col")])
With Threshold Overlay

Multiple Phases: Segmenting Shale

% Shale provided from Kanitpanyacharoen, W. (2012). Synchrotron X-ray Applications Toward an Understanding of Elastic Anisotropy.


Ideally we would derive 3 values for the thresholds based on a model for the composition of each phase and how much it absorbs, but that is not always possible or practical.

Multiple Segmentations

For this exercise we choose arbitrarily 3 ranges for the different phases and perform visual inspection

plot of chunk unnamed-chunk-30


The relation can explicitly be written out as \[ I(x) = \begin{cases} \text{Void}, & 0 \leq x \leq 0.2 \\ \text{Clay}, & 0.2 < x \leq 0.5 \\ \text{Rock}, & 0.5 < x \end{cases}\]

ggplot(shale.vals,aes(x=x,y=y,fill=th.label,alpha=1-val))+coord_fixed(ratio=1)+
  geom_raster()+guides(fill=F,alpha=F)+theme_bw(20)+facet_wrap(~th.label)
Segmented Images

Implementation

The implementations of basic thresholds and segmentations are very easy since thresholding is a unary operation on a single image \[ f(I(\vec{x})) \] In mathematical terms this is called a map and since it does not require information from neighboring voxels or images it can be calculated for each point independently (parallel). Filters on the other hand almost always depend on neighboring voxels and thus the calculations are not as easy to separate.

Implementation Code

Matlab

The simplest is a single threshold in Matlab:

thresh_img = gray_img > thresh

A more complicated threshold:

thresh_img = (gray_img > thresh_a) & (gray_img > thresh_b)

Python (numpy)

thresh_img = gray_img > thresh

Python

thresh_img = map(lambda gray_val: gray_val>thresh,
                gray_img)

Java

boolean[] thresh_img = new boolean[x_size*y_size*z_size];
for(int x=x_min;x<x_max;x++)
  for(int y=y_min;y<y_max;y++)
    for(int z=z_min;z<z_max;z++) {
      int offset=(z*y_size+y)*x_size+x;
      thresh_img[offset]=gray_img[offset]>thresh;
    }

In C/C++

bool* thresh_img = malloc(x_size*y_size*z_size * sizeof (bool));

for(int x=x_min;x<x_max;x++)
  for(int y=y_min;y<y_max;y++)
    for(int z=z_min;z<z_max;z++) {
      int offset=(z*y_size+y)*x_size+x;
      thresh_img[offset]=gray_img[offset]>thresh;
    }

Pitfalls with Segmentation

type: alert

Partial Volume Effect

make.circle<-function(nx,r,nscale=1,
                      circ.fun=function(x,y,r) ((x^2+y^2)
Partial Volume Effect


ggplot(sph.list,aes(x=val,color=as.factor(r)))+
  geom_density(trim=T,adjust=1,kernel="biweight")+#facet_wrap(~r)+#scale_y_log10()+
  theme_bw(20)+labs(x="Intensity",y="Frequency",color="Radius")
Partial Volume Effect

pve.table<-ddply(sph.list,.(r),function(c.rad.df) data.frame(m.int=mean(c.rad.df$val),
                                                             m.sd=sd(c.rad.df$val)
                                                             )
                 )
names(pve.table)<-c("Radius","Mean Intensity","Sd Intensity")
kable(pve.table)
Radius Mean Intensity Sd Intensity
2.0000 0.0311250 0.1623387
2.9375 0.0677250 0.2369905
3.8750 0.1177250 0.3061084
4.8125 0.1822250 0.3687992
5.7500 0.2601250 0.4196009
6.6875 0.3511250 0.4567461
7.6250 0.4569250 0.4762676
8.5625 0.5761250 0.4690413
9.5000 0.7072841 0.4258651

Morphology

Returning to the original image of a cross

nx<-8
ny<-8
cross.im<-expand.grid(x=c(-nx:nx)/nx*2*pi,y=c(-ny:ny)/ny*2*pi)
cross.im<-cbind(cross.im,
               col=with(cross.im,1.5*((abs(x)<2) | (abs(y)<2))+
                 0.5*runif(nrow(cross.im))))
thresh.val<-0.75
cross.im$val<-(cross.im$col>=thresh.val)
ggplot(cross.im,aes(x=x,y=y))+
  geom_tile(aes(fill=col))+
  geom_tile(data=subset(cross.im,val),fill="red",color="black",alpha=0.3)+
  geom_tile(data=subset(cross.im,!val),fill="blue",color="black",alpha=0.3)+
  scale_fill_gradient(low="black",high="white")+
  labs(fill="Intensity")+
  theme_bw(20)
Cross With Threshold Overlay

We can now utilize information from neighborhood voxels to improve the results. These steps are called morphological operations.


Like filtering the assumption behind morphological operations are

Therefore these imaging problems can be alleviated by adjusting the balance between local and neighborhood values.

Fundamentals: Neighborhood

A neighborhood consists of the pixels or voxels which are of sufficient proximity to a given point. There are a number of possible definitions which largely affect the result when it is invoked.


The neighborhood is important for a large number of image and other (communication, mapping, networking) processing operations:

Fundamentals: Neighbors in 2D

For standard image operations there are two definitions of neighborhood. The 4 and 8 adjacent neighbors shown below. Given the blue pixel in the center the red are the 4-adjacent and the red and green make up the 8 adjacent.

morph.2d<-expand.grid(x=c(-1,0,1),y=c(-1,0,1))
morph.2d$r<-with(morph.2d,sqrt(x^2+y^2))

ggplot(morph.2d,aes(x=x,y=y))+
  geom_tile(color="black",
            aes(fill=ifelse(r==0,"Pixel of Interest",
                                   ifelse(r==1,"4-Adjacent","8-Adjacent")))
            )+
  geom_text(aes(label=3*(y+1)+x+1))+
  labs(fill="Pixel Type")+
  theme_bw(20)

Formal Neighborhood Definitions

Formally these have two effectively equivalent, but different definitions.

cent.pix<-data.frame(x=c(-1, 1,1,-1,-1)/2,
                     y=c(-1,-1,1, 1,-1)/2)
ggplot(morph.2d,aes(x=x,y=y))+
  geom_tile(color="black",size=1,alpha=0.7,
            aes(fill=ifelse(r==0,"Pixel of Interest",
                                   ifelse(r==1,"4-Adjacent","8-Adjacent")))
            )+
  geom_path(data=cent.pix,color="red",size=3)+
  geom_point(data=cent.pix,color="yellow",size=5)+
  geom_text(aes(label=3*(y+1)+x+1))+
  labs(fill="Pixel Type")+
  theme_bw(20)


Formal Neighborhood Definitions in 3D

In 3D there is now an additional group to consider because of the extra-dimension

morph.3d<-expand.grid(x=c(-1,0,1),y=c(-1,0,1),z=c(-1,0,1))
morph.3d$r<-with(morph.3d,sqrt(x^2+y^2+z^2))

cent.pix.3d<-data.frame(x=c(-1, 1,1,-1,-1,-1, 1,1,-1,-1)/2,
                     y=c(-1,-1,1, 1,-1,-1,-1,1, 1,-1)/2,
                     z=c(-1,-1,-1,-1,-1,1, 1,1, 1, 1))
face.pix.3d<-data.frame(x=c(-1, 1, 1,-1,-1)/2,
                        y=c(-1,-1, 1, 1,-1)/2,
                        z=c( 0, 0, 0, 0, 0))
# Classify a 3D neighbour by its centre distance r: 0 -> the voxel itself,
# 1 -> face neighbour (6-adjacent), sqrt(2) -> edge neighbour (18-adjacent),
# anything else -> corner (26-adjacent).
# NOTE(review): relies on exact float equality with sqrt(2); it works here
# because r is produced by the same sqrt(x^2+y^2+z^2) expression on integer
# offsets, but it is fragile in general — confirm before reusing elsewhere.
pixel.label<-function(r) ifelse(r==0,"Pixel of Interest",
                                   ifelse(r==1," 6-Adjacent",
                                          ifelse(r==sqrt(2),"18-Adjacent","26-Adjacent")))
ggplot(morph.3d,aes(x=x,y=y))+
  geom_tile(color="black",size=1,alpha=0.7,
            aes(fill=pixel.label(r))
            )+
  geom_path(data=cent.pix.3d,color="yellow",size=3)+
  geom_point(data=cent.pix.3d,color="purple",size=5)+
  geom_path(data=face.pix.3d,color="red",size=3)+
  geom_point(data=face.pix.3d,color="yellow",size=5)+
  geom_text(aes(label=9*(z+1)+3*(y+1)+x+1))+
  facet_grid(~z)+
  labs(fill="Pixel Type")+
  theme_bw(20)


Erosion and Dilation

Erosion

If any of the voxels in the neighborhood are 0/false then the voxel will be set to 0


Dilation

If any of the voxels in the neighborhood are 1/true then the voxel will be set to 1

Applied Erosion and Dilation

Dilation

Taking an image of the cross at a too-high threshold, we show how dilation can be used to recover some of the missing pixels

cross.im$thresh<-cross.im$col>1.75

cross.mat<-df.to.im(cross.im,"thresh",inv=T)
cross.df<-im.to.df(cross.mat)
ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  labs(fill="Type")+
  theme_bw(20)
Dilation Example

cross.dil.mat<-imgStdBinaryDilation(cross.mat)
cross.dil.df<-im.to.df(cross.dil.mat)

ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  geom_tile(data=subset(cross.dil.df,val==0),aes(fill="post dilation"),color="black",alpha=0.3)+
  labs(fill="Type")+
  theme_bw(20)
Dilation Example


Erosion

Taking an image of the cross at a too-low threshold, we show how erosion can be used to remove some of the extra pixels

cross.im$thresh<-cross.im$col>0.4
cross.mat<-df.to.im(cross.im,"thresh",inv=T)
cross.df<-im.to.df(cross.mat)
ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  labs(fill="Type")+
  theme_bw(20)
Erosion Example

cross.ero.mat<-imgStdBinaryErosion(cross.mat)
cross.ero.df<-im.to.df(cross.ero.mat)
ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  geom_tile(data=subset(cross.ero.df,val==0),aes(fill="post erosion"),color="black",alpha=0.3)+
  labs(fill="Type")+
  theme_bw(20)
Erosion Example

Opening and Closing

Opening

An erosion followed by a dilation operation


Closing

A dilation followed by an erosion operation

Applied Opening and Closing

Opening

Taking an image of the cross at a too-low threshold, we show how opening can be used to remove some of the extra pixels

cross.im$thresh<-cross.im$col>0.4
cross.mat<-df.to.im(cross.im,"thresh",inv=T)
cross.df<-im.to.df(cross.mat)
ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  labs(fill="Type")+
  theme_bw(20)
Erosion Example

cross.ero.mat<-imgStdBinaryOpening(cross.mat)
cross.ero.df<-im.to.df(cross.ero.mat)
ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  geom_tile(data=subset(cross.ero.df,val==0),aes(fill="post opening"),color="black",alpha=0.3)+
  labs(fill="Type")+
  theme_bw(20)
Erosion Example


Closing

Taking an image of the cross at a too-high threshold, we show how closing can be used to recover some of the missing pixels

cross.im$thresh<-cross.im$col>1.75

cross.mat<-df.to.im(cross.im,"thresh",inv=T)
cross.df<-im.to.df(cross.mat)
ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  labs(fill="Type")+
  theme_bw(20)
Dilation Example

cross.dil.mat<-imgStdBinaryClosing(cross.mat)
cross.dil.df<-im.to.df(cross.dil.mat)

ggplot(cross.df,aes(x=x,y=y))+
  geom_tile(data=subset(cross.df,val==0),aes(fill="original"),color="black",alpha=0.7)+
  geom_tile(data=subset(cross.dil.df,val==0),aes(fill="post closing"),color="black",alpha=0.3)+
  labs(fill="Type")+
  theme_bw(20)
Dilation Example

Applying Morphological Tools


A segmented slice taken from a cortical bone sample. The dark is the calcified bone tissue and the white are holes in the image

Bone Slice

Filling Holes / Creating Masks

cortbone.im<-imagedata(t(png::readPNG("ext-figures/bone.png")),"grey")
cortbone.df<-im.to.df(cortbone.im)
#cortbone.df$val<-255*(cortbone.df$val>0.5)
cortbone.close.im<-imgStdBinaryClosing(cortbone.im)
ggplot(subset(cortbone.df,val<1),aes(x=x,y=518-y))+
  geom_raster(data=subset(im.to.df(cortbone.close.im),val<1),
              aes(fill="closing 3x3"),alpha=0.8)+
  geom_raster(aes(fill="before closing"),alpha=0.6)+
  labs(fill="Image",y="y",x="x",title="3x3 Closing")+
  coord_equal()+
  theme_bw(20)
Cortical Segment closing


cortbone.close.im<-imgStdBinaryClosing(cortbone.im,dim=7)
ggplot(subset(cortbone.df,val<1),aes(x=x,y=600-y))+
  geom_raster(data=subset(im.to.df(cortbone.close.im),val<1),
              aes(fill="closing 7x7"),alpha=0.8)+
  geom_raster(aes(fill="before closing"),alpha=0.6)+
  labs(fill="Image",y="y",x="x",title="7x7 Closing")+
  coord_equal()+
  theme_bw(20)
Cortical Segment closing 7

Filling Holes / Creating Masks

Applying the closing with even larger windows will close everything but begin to destroy the underlying structure of the mask

full.kernel<-makeBrush(45,"box")
cortbone.close.im<-closing(cortbone.im<1,full.kernel)
ggplot(subset(cortbone.df,val<1),aes(x=x,y=600-y))+
  geom_raster(data=subset(im.to.df(cortbone.close.im),val>0),
              aes(fill="closing 45x45"),alpha=0.8)+
  geom_raster(aes(fill="before closing"),alpha=0.6)+
  labs(fill="Image",y="x",x="y",title="45x45 Closing")+
  coord_equal()+
  theme_bw(20)
Cortical Segment closing 19


We can characterize this by examining the dependency of the closing size and the volume fraction (note scale)

# Volume fraction (%) of positive pixels in img, normalised by the pixel
# count of the global cortbone.im (assumes img has the same dimensions)
get.vf<-function(img) (100*sum(img>0)/
               (nrow(cortbone.im)*ncol(cortbone.im)))
# Close the thresholded bone mask (cortbone.im < 1) with a square "box"
# kernel of the given size and report the resulting bone volume fraction
vf.of.closing<-function(dim.size) {
  data.frame(dim.size=dim.size,
             vf.full=get.vf(closing(cortbone.im<1,makeBrush(dim.size,"box" )))
             )
  }
pt.list<-c(seq(1,10),seq(11,40,2))
vf.df<-ldply(pt.list,vf.of.closing,.parallel=T)
ggplot(vf.df,aes(x=dim.size))+
  geom_line(aes(y=vf.full))+
  labs(fill="Image",y="Bone Volume Frac(%)",x="Closing Size",title="Closing Size vs Volume Fraction")+
  theme_bw(15)
Cortical Segment closing 19

Different Kernels

Using a better kernel shape (circular, diamond shaped, etc) the artifacts from morphological operations can be reduced

disc.kernel<-makeBrush(45,"disc")
cortbone.close.im<-closing(cortbone.im<1,disc.kernel)
ggplot(subset(cortbone.df,val<1),aes(x=x,y=600-y))+
  geom_raster(data=subset(im.to.df(cortbone.close.im),val>0),
              aes(fill="closing 45x45"),alpha=0.8)+
  geom_raster(aes(fill="before closing"),alpha=0.6)+
  labs(fill="Image",y="x",x="y",title="45x45 Closing")+
  coord_equal()+
  theme_bw(20)
Cortical Segment closing 19


We can characterize this by examining the dependency of the closing size and the volume fraction (note scale)

# Volume fraction (%) of positive pixels in img, normalised by the pixel
# count of the global cortbone.im (redefined from the earlier chunk)
get.vf<-function(img) (100*sum(img>0)/
               (nrow(cortbone.im)*ncol(cortbone.im)))
# Compare closing kernels: close the thresholded bone mask with box, disc,
# diamond, and vertical/horizontal line brushes of the given size and report
# the bone volume fraction for each
vf.of.closing<-function(dim.size) {
  data.frame(dim.size=dim.size,
             vf.full=get.vf(closing(cortbone.im<1,makeBrush(dim.size,"box" ))),
             vf.disc=get.vf(closing(cortbone.im<1,makeBrush(dim.size,"disc"))),
             vf.diam=get.vf(closing(cortbone.im<1,makeBrush(dim.size,"diamond"))),
             vf.vline=get.vf(closing(cortbone.im<1,makeBrush(dim.size,"line",angle=0))),
             vf.hline=get.vf(closing(cortbone.im<1,makeBrush(dim.size,"line",angle=90)))
             )
  }
pt.list<-c(seq(1,9,2),seq(11,60,4))
vf.df<-ldply(pt.list,vf.of.closing,.parallel=T)
ggplot(vf.df,aes(x=dim.size))+
  geom_line(aes(y=vf.full,color="Full"))+
  geom_line(aes(y=vf.disc,color="Circular"))+
  geom_line(aes(y=vf.diam,color="Diamond-Shape"))+
  geom_line(aes(y=vf.vline,color="Vertical Line"))+
  geom_line(aes(y=vf.hline,color="Horizontal Line"))+
  labs(color="Kernel Used",y="Bone Volume Fraction (%)",x="Closing Size",title="Closing Size vs Volume Fraction")+
  theme_bw(20)
Cortical Segment closing Graph

Segmenting Fossils

Taken from the BBC Documentary First Life by David Attenborough

Quantifying Alzheimers: Segment the Cortex

Courtesy of Alberto Alfonso


Quantifying Alzheimers: Segment the Cortex


Surface Area / Perimeter

We see that the dilation and erosion effects are strongly related to the surface area of an object: the more surface area, the larger the effect of a single dilation or erosion step.

make.circle<-function(nx,r,nscale=1) {
  ny<-nx
  grad.im<-expand.grid(x=c(-nx:nx)/nscale,y=c(-ny:ny)/nscale)
  grad.im$val<-with(grad.im,(x^2+y^2)
Surface Area Estimation


# For a synthetic object of radius r (built by im.fun, default make.circle),
# count positive pixels before (volume) and after one dilation (dvolume) and
# one erosion (evolume) with a 3x3 diamond kernel. The volume differences
# are used below to estimate the object's perimeter morphologically.
calc.eros.cnt<-function(nx,r,nscale=1,im.fun=make.circle) {
  in.cir<-im.fun(nx,r,nscale=nscale)
  cir.im<-df.to.im(in.cir)
  data.frame(nx=nx,r=r,nscale=nscale,
             dvolume=sum(dilate(cir.im>0,makeBrush(3,"diamond"))>0),
             evolume=sum(erode(cir.im>0,makeBrush(3,"diamond"))>0),
             volume=sum(cir.im>0)
             )
}
max.rad<-20
r.list<-seq(0.2,max.rad,length.out=20)
r.table<-ldply(r.list,function(r) calc.eros.cnt(max.rad+1,r,1))
r.table$model.perimeter<-with(r.table,2*pi*r)
r.table$estimated.perimeter<-with(r.table,
                                  ((dvolume-volume)+(volume-evolume))/2)
ggplot(r.table,aes(x=r))+
  geom_line(aes(y=estimated.perimeter,color="Morphological Estimation"))+
  geom_line(aes(y=model.perimeter,color="Model Circumference"))+
  labs(x="Radius",y="Perimeter",color="Source")+
  theme_bw(20)
Attenuation to Intensity